## -*-Tcl-*-
# ###################################################################
# HTML mode - tools for editing HTML documents
#
# FILE: "htmlUtils.tcl"
# created: 99-07-20 17.52.36
# last update: 00-12-29 21.55.43
# Author: Johan Linde
# E-mail: <alpha_www_tools@go.to>
# www: <http://go.to/alpha_www_tools>
#
# Version: 3.0
#
# Copyright 1996-2001 by Johan Linde
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# ###################################################################
##

#===============================================================================
# This file contains the procs for the URLs and Windows submenus plus procs
# for general caches.
#===============================================================================

#===============================================================================
# ◊◊◊◊ General caches ◊◊◊◊ #
#===============================================================================

# Saves text in a cache file in the HTML mode preferences folder, tagged
# with the current version number of HTML mode.
proc html::SaveCache {cache text} {
    global html::PrefsFolder htmlVersion
    file::ensureDirExists ${html::PrefsFolder}
    set fid [open [file join ${html::PrefsFolder} $cache] w]
    if {[info exists htmlVersion]} {puts $fid "#$htmlVersion"} else {puts $fid "#1.0"}
    puts $fid $text
    close $fid
}

# Sources a cache file. If the cache is missing, or was written by another
# version of HTML mode, it is deleted and an error is raised.
proc html::ReadCache {cache {level #0}} {
    global html::PrefsFolder htmlVersion
    if {![file exists [file join ${html::PrefsFolder} $cache]]} {error "No cache."}
    set fid [open [file join ${html::PrefsFolder} $cache] r]
    gets $fid version
    if {![regexp {^#[0-9]+\.[0-9]+$} $version] || $version != "#$htmlVersion"} {
        close $fid
        html::DeleteCache $cache
        error "Wrong version."
    }
    close $fid
    eval {uplevel $level [list source [file join ${html::PrefsFolder} $cache]]}
}

# Removes a cache file, silently ignoring a missing file.
proc html::DeleteCache {cache} {
    global html::PrefsFolder
    catch {file delete [file join ${html::PrefsFolder} $cache]}
}

#===============================================================================
# ◊◊◊◊ URL and Window Caches ◊◊◊◊ #
#===============================================================================

# Adds the given URL or window name to the corresponding cache.
proc html::AddToCache {cache newurl} {
    global HTMLmodeVars htmlModeIsLoaded

    # The predefined frame targets are never cached.
    if {$cache == "windows" && [lsearch -exact {_self _top _parent _blank} $newurl] >= 0} {return}
    set URLs $HTMLmodeVars($cache)

    if {[string length $newurl] && ![lcontains URLs $newurl]} {
        set URLs [lsort [lappend URLs $newurl]]
        set HTMLmodeVars($cache) $URLs
        prefs::modifiedModeVar $cache HTML
        if {[set l [llength $URLs]] == 1 && [info exists htmlModeIsLoaded]} {html::Enable$cache on}
        # Suggest a cleanup every tenth addition once the cache holds more than 75 items.
        if {$l > 75 && ($l % 10) == 0} {
            alertnote "The $cache cache is very large. Consider cleaning it up."
        }
    }
}
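
# Illustrative calls (the URL and target name are made up; the two caches
# used by HTML mode are named URLs and windows, as above):
#
#   html::AddToCache URLs "images/logo.gif"
#   html::AddToCache windows "mainframe"
#   html::AddToCache windows "_top"      ;# ignored, predefined target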

# Lets the user remove entries from a cache through a dialog with one
# checkbox per entry, split over several pages if the list is long.
proc html::CleanUpCache {cache} {
    global HTMLmodeVars

    set URLs $HTMLmodeVars($cache)

    if {![llength $URLs]} {
        return
    }
    set urlnumber [llength $URLs]
    set screenHeight [lindex [getMainDevice] 3]
    set maxLines [expr {($screenHeight - 160) / 20}]
    set pages [expr {($urlnumber - 1) / $maxLines}]
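    # Worked example with made-up numbers: 47 cached URLs and maxLines = 20
    # give pages = (47 - 1) / 20 = 2, i.e. pages 0 through 2, and the last
    # page shows (47 - 1) % 20 + 1 = 7 checkboxes.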
    set thispage 0
    for {set i 0} {$i < $urlnumber} {incr i} {
        lappend URLsToSave 1
    }
    set thisbox $URLsToSave
    while {1} {
        if {$thispage < $pages} {
            set thisurlnumber $maxLines
        } else {
            set thisurlnumber [expr {($urlnumber - 1) % $maxLines + 1}]
        }
        set height [expr {75 + $thisurlnumber * 20}]
        set box "-w 440 -h $height -b OK 20 [expr {$height - 30}] 85 [expr {$height - 10}] \
            -b Cancel 100 [expr {$height - 30}] 165 [expr {$height - 10}] \
            -b {Uncheck all} 180 [expr {$height - 30}] 265 [expr {$height - 10}] \
            -t {Uncheck the $cache you want to remove} 10 10 440 30 "
        if {$thispage < $pages} {
            lappend box -b "Next>>" 360 [expr {$height - 30}] 425 [expr {$height - 10}]
        }
        if {$thispage > 0} {
            lappend box -b "<<Prev" 280 [expr {$height - 30}] 345 [expr {$height - 10}]
        }

        set hpos 30
        set thisURLs [lrange $URLs [expr {$thispage * $maxLines}] \
            [expr {$thispage * $maxLines + $maxLines - 1}]]
        set i 0
        foreach url $thisURLs {
            lappend box -c $url [lindex $thisbox $i] 10 $hpos 430 [expr {$hpos + 15}]
            incr i
            incr hpos 20
        }
        set thisbox [eval [concat dialog $box]]
        if {[lindex $thisbox 1]} {
            # cancel
            return
        } elseif {[lindex $thisbox 2]} {
            # Uncheck all: clear the check value of every box on this page.
            set thisbox {}
            for {set i 0} {$i < $thisurlnumber} {incr i} {
                lappend thisbox 0
            }
        } else {
            # ll is the number of buttons in the dialog, i.e. the index of the
            # first checkbox value in the dialog result.
            if {$pages == 0} {
                set ll 3
            } elseif {$thispage == 0 || $thispage == $pages} {
                set ll 4
            } else {
                set ll 5
            }
            set URLsToSave [eval [concat lreplace [list $URLsToSave] [expr {$thispage * $maxLines}] \
                [expr {$thispage * $maxLines + $maxLines - 1}] [lrange $thisbox $ll end]]]
            if {[lindex $thisbox 0]} {
                # OK
                break
            } elseif {$thispage < $pages && [lindex $thisbox 3]} {
                # more
                incr thispage 1
                set thisbox [lrange $URLsToSave [expr {$thispage * $maxLines}] \
                    [expr {$thispage * $maxLines + $maxLines - 1}]]
            } else {
                # back
                incr thispage -1
                set thisbox [lrange $URLsToSave [expr {$thispage * $maxLines}] \
                    [expr {$thispage * $maxLines + $maxLines - 1}]]
            }
        }
    }
    set newurls {}
    for {set i 0} {$i < $urlnumber} {incr i} {
        if {[lindex $URLsToSave $i]} {
            lappend newurls [lindex $URLs $i]
        }
    }
    set HTMLmodeVars($cache) $newurls
    prefs::modifiedModeVar $cache HTML
    if {![llength $newurls]} {html::Enable$cache off}
}

# Adds the selection or the clipboard content to the URL cache.
proc html::SelScrapToURL {sel msg1 msg2} {
    set newurl [html::URLunEscape [string trim [eval get$sel]]]
    # Refuse URLs containing tabs, returns, or newlines.
    if {[regexp {[\t\r\n]} $newurl]} {
        alertnote "$msg1 contains tabs or returns. It will not be added to the URL cache."
        return
    }
    if {[string length $newurl]} {
        html::AddToCache URLs $newurl
        message "$newurl added to URLs."
    } else {
        beep
        message $msg2
    }
}

proc html::AddSelection {} {
    html::SelScrapToURL Select Selection "No selection!"
}

proc html::AddClipboard {} {
    html::SelScrapToURL Scrap Clipboard "Clipboard empty!"
}

proc html::ClearCache {cache} {
    global HTMLmodeVars
    if {[llength $HTMLmodeVars($cache)] && [askyesno "Remove all $cache from [string range $cache 0 [expr {[string length $cache] - 2}]] cache?"] == "yes"} {
        set HTMLmodeVars($cache) {}
        prefs::modifiedModeVar $cache HTML
        html::Enable$cache off
    }
}

# Imports all URLs found in a file into the URL cache.
proc html::Import {} {
    global HTMLmodeVars
    set urls $HTMLmodeVars(URLs)

    if {[catch {getfile "Import URLs from:"} fil] || ![html::IsTextFile $fil alertnote]} {return}
    set fid [open $fil r]
    set filecont " [read $fid]"
    close $fid
    if {[llength $urls]} {
        set cl [askyesno -c "Clear URL cache before importing?"]
        if {$cl == "cancel"} {
            return
        } elseif {$cl == "yes"} {
            set urls {}
        }
    }

    set exp1 "\[ \\t\\n\\r\]+[html::URLregexp]"
    set exp2 {[ \t\r\n]+(url)\([ \t\r\n]*("[^"]+"|'[^']+'|[^ \t\n\r\)]+)[ \t\r\n]*\)}
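    # Roughly, exp1 picks up attribute-style links through the pattern
    # returned by html::URLregexp, while exp2 picks up CSS-style url(...)
    # references. Made-up examples of matching file content:
    #
    #   href="docs/index.html"     -> adds docs/index.html
    #   url('../images/bg.gif')    -> adds ../images/bg.gif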
    for {set i1 1} {$i1 < 3} {incr i1} {
        set fcont $filecont
        set exp [set exp$i1]
        while {[regexp -nocase -indices $exp $fcont a b url]} {
            set link [html::URLunEscape [string trim [string range $fcont [lindex $url 0] [lindex $url 1]] {"'}]]
            set fcont [string range $fcont [lindex $url 1] end]
            if {![lcontains urls $link]} {
                lappend urls $link
            }
        }
    }
    set HTMLmodeVars(URLs) [lsort $urls]
    prefs::modifiedModeVar URLs HTML
    html::EnableURLs [expr {[llength $HTMLmodeVars(URLs)] > 0}]
    message "URLs imported."
}

# Exports the URLs in the cache to a file, one HREF="..." line per URL.
proc html::Export {} {
    global HTMLmodeVars
    if {![llength $HTMLmodeVars(URLs)]} {
        return
    }
    foreach url $HTMLmodeVars(URLs) {
        lappend out "HREF=\"$url\""
    }
    if {![catch {putfile "Export URL cache to:" "URL cache"} fil]} {
        set fid [open $fil w]
        puts $fid [join $out "\n"]
        close $fid
        message "URLs exported."
    }
}

# Adds all files in a folder to the URL cache.
proc html::AddFolder {} {
    global HTMLmodeVars file::separator
    if {[catch {html::GetDir "Folder to cache:"} folder]} {return}
    set path ""
    # If the folder is inside one of the user's home page folders, compute the
    # corresponding relative path (offered as the default in the dialog below),
    # with / as the path separator.
    foreach hp $HTMLmodeVars(homePages) {
        if {[string match [file join [lindex $hp 0] *] [file join $folder " "]]} {
            set path [string range $folder [expr {[string length [lindex $hp 0]] + 1}] end]
            regsub -all ${file::separator} $path {/} path
            if {[string length $path]} {append path /}
        }
    }
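    # Made-up example: if one of the home page folders is a folder named Site
    # and the user picks its subfolder images, the suggested path becomes
    # "images/", so a file logo.gif in that folder ends up in the cache as
    # "images/logo.gif".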
    set val [dialog -w 350 -h 80 -t "Path:" 10 10 60 30 -e $path 70 10 340 25 \
        -b OK 20 50 85 70 -b Cancel 110 50 175 70]
    if {[lindex $val 2]} {return}
    set path [string trim [lindex $val 0]]
    if {[string length $path]} {set path "[string trimright $path /]/"}
    set urls $HTMLmodeVars(URLs)
    if {[llength $urls]} {
        set cl [askyesno -c "Clear URL cache first?"]
        if {$cl == "cancel"} {
            return
        } elseif {$cl == "yes"} {
            set urls {}
        }
    }

    foreach fil [glob -nocomplain -dir $folder *] {
        set name [file tail $fil]
        if {![file isdirectory $fil] && ![lcontains urls "$path$name"]} {
            lappend urls "$path$name"
        }
    }
    set HTMLmodeVars(URLs) [lsort $urls]
    prefs::modifiedModeVar URLs HTML
    html::EnableURLs [expr {[llength $HTMLmodeVars(URLs)] > 0}]
    message "Files added to URL cache."
}